import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
# NOTE: the duplicate "from matplotlib import pyplot as plt" was removed —
# it re-bound the same name as the line above.
from torchsummary import summary
from nmfd_gnn import NMFD_GNN

print (torch.cuda.is_available())
# Fall back to CPU when no GPU is present so the script stays runnable;
# on a CUDA machine this is identical to the original torch.device("cuda:0").
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Fix all RNG seeds for reproducibility across runs.
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
r = random.random  # kept for backward compatibility; see train_process shuffle
# (A stray bare "True" statement here was pasted notebook output; removed.)
#1.1: settings
M = 10 #number of time intervals in a window
missing_ratio = 0.50  # fraction of sensor readings masked out
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))
print (file_name)
#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1  # loss weights: flow / occupancy / physics
batch_size_vt = 16 #batch size for evaluation and test
hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,
         "beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy}
gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10 #column dimension of L1, L2
c_k = 5.5 #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.879, 5.207, -2.473, 1.722, 3.619]  # initial model parameters — presumably MFD coefficients; confirm in NMFD_GNN
hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,
               "p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30  # early-stop patience: epochs without a new validation minimum
#1.3: set paths
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic_london/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
    file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
# BUG FIX: a stray "m_10_missing_50" line here was pasted notebook output
# (the echo of print(file_name)); it raised a NameError and was removed.
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot the per-epoch training losses and save them under <file_name>/.

    total_phy_flow_occ_loss: list of [total, physics, flow, occupancy] per epoch;
    the total (column 0) is not plotted, only its three components.
    """
    plt.figure(figsize=(4,3), dpi=75)
    loss_arr = np.array(total_phy_flow_occ_loss)
    epochs = range(len(loss_arr))
    # Columns: 1 = physics loss, 2 = flow loss, 3 = occupancy loss.
    for col, label in ((1, "phy loss"), (2, "flow loss"), (3, "occ loss")):
        plt.plot(epochs, loss_arr[:, col], linewidth=1, label=label)
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot per-epoch flow MAE for the validation and test sets and save the figure."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_f_mae))
    for series, label in ((vali_f_mae, "Validate"), (test_f_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot per-epoch occupancy MAE for the validation and test sets and save the figure."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_o_mae))
    for series, label in ((vali_o_mae, "Validate"), (test_o_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()
def MAELoss(yhat, y):
    """Return the mean absolute error between *yhat* and *y* as a Python float.

    The original wrapped the absolute error in torch.div(..., 1), a no-op
    division by one; that redundant call has been removed.
    """
    return float(torch.mean(torch.abs(yhat-y)))
def RMSELoss(yhat, y):
    """Return the root-mean-square error between *yhat* and *y* as a Python float."""
    squared_error = (yhat - y) ** 2
    return float(torch.sqrt(squared_error.mean()))
def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):
    """Evaluate *model* on one data split, batching to bound memory use.

    Args:
        model: network exposing model.run(flow_masked, occ_masked) ->
            (flow_hat, occ_hat, q_hat, q_theta); only the first two are used.
        f, o: ground-truth flow / occupancy targets (normalized), on CPU.
        f_mask, o_mask: masked (partially observed) inputs; the caller moves
            these to the compute device before calling.
        f_o_mean_std: [f_mean, f_std, o_mean, o_std]; only the stds are used,
            to de-normalize errors back to physical units.
        b_s_vt: evaluation batch size.

    Returns:
        (f_mae, f_rmse, o_mae, o_rmse) aggregated over all n samples.

    NOTE(review): no model.eval() / torch.no_grad() here — if the model has
    dropout/batchnorm or builds autograd graphs, evaluation is affected;
    confirm this is intentional.
    """
    flow_std, occ_std, n = f_o_mean_std[1], f_o_mean_std[3], len(f)
    f_mae_list, f_rmse_list, o_mae_list, o_rmse_list, num_list = list(), list(), list(), list(), list()
    for i in range(0, n, b_s_vt):
        # Clamp the final batch to the dataset size; num_list records the
        # actual batch sizes for sample-weighted aggregation below.
        s, e = i, np.min([i+b_s_vt, n])
        num_list.append(e-s)
        bf, bo, bf_mask, bo_mask = f[s: e], o[s: e], f_mask[s: e], o_mask[s: e]
        bf_hat, bo_hat, bq_hat, bq_theta = model.run(bf_mask, bo_mask)
        bf_hat, bo_hat = bf_hat.cpu(), bo_hat.cpu()
        # De-normalize the errors by multiplying with the respective std.
        bf_mae, bf_rmse = MAELoss(bf_hat, bf)*flow_std, RMSELoss(bf_hat, bf)*flow_std
        bo_mae, bo_rmse = MAELoss(bo_hat, bo)*occ_std, RMSELoss(bo_hat, bo)*occ_std
        f_mae_list.append(bf_mae)
        f_rmse_list.append(bf_rmse)
        o_mae_list.append(bo_mae)
        o_rmse_list.append(bo_rmse)
    # MAE: sample-weighted mean of per-batch MAEs.
    f_mae, o_mae = np.dot(f_mae_list, num_list)/n, np.dot(o_mae_list, num_list)/n
    # RMSE: sqrt of the sample-weighted mean of per-batch MSEs, which is
    # exact even when the last batch is smaller than b_s_vt.
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list)/n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list)/n)
    return f_mae, f_rmse, o_mae, o_rmse
def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt): #vt: vali_test
    """Score *model* on one split; returns (f_mae, f_rmse, o_mae, o_rmse).

    Thin wrapper that reorders the arguments into the (f, f_mask, o, o_mask)
    order expected by vali_test.
    """
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ):
    #f: flow; o: occupancy
    """Run one optimization epoch over the (already shuffled) training data.

    Args:
        model: network exposing model.run(flow_masked, occ_masked) ->
            (flow_hat, occ_hat, q_hat, q_theta).
        opt: optimizer over model.parameters().
        criterion: per-component loss (MSE at the call site).
        train_f_x, train_f_y: masked flow inputs (on device) / flow targets.
        train_o_x, train_o_y: masked occupancy inputs (on device) / targets.
        hyper: dict providing loss weights beta_f/beta_o/beta_p and batch size b_s.
        flow_std_squ: squared flow std; rescales the physics loss into
            normalized units comparable with the data losses.

    Returns:
        (aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss), each
        averaged over the batches actually processed.

    NOTE(review): range(0, n-b_s, b_s) never starts a batch at index n-b_s,
    so the tail of the data is dropped each epoch — confirm this is the
    intended "full batches only" behavior.
    """
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()
    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]
        opt.zero_grad()
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)
        # Physics loss between the two model-produced flow estimates,
        # moved to CPU and rescaled by the squared flow std.
        p_loss = criterion(q_hat, q_theta).cpu() #physical loss
        p_loss = p_loss/flow_std_squ
        f_loss = criterion(y_f_hat.cpu(), y_f_batch) #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch) #data loss of occupancy
        # Weighted multi-task objective.
        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss
        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())
        # Progress report every 64 batches.
        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")
    # Tiny epsilon guards against division by zero when no batch was run.
    n_loss = float(len(losses)+0.000001)
    aver_loss = sum(losses)/n_loss
    aver_p_loss = sum(p_losses)/n_loss
    aver_f_loss = sum(f_losses)/n_loss
    aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss
#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    """Full training loop: per-epoch shuffle, train, evaluate, plot, early stop.

    Args:
        model: NMFD_GNN instance, already on *device*.
        criterion: per-component loss (MSELoss at the call site).
        train, vali, test: split dicts of tensors produced by tensorize().
        hyper: dict with n_e, b_s, b_s_vt, l_r and the three beta weights.
        f_o_mean_std: [f_mean, f_std, o_mean, o_std].

    Returns:
        (total_phy_flow_occ_loss, model) — per-epoch loss records and the
        (possibly early-stopped) trained model.

    Side effects: writes plots and <file_name>/performance.json every epoch.
    """
    total_phy_flow_occ_loss = list()
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    vali_f, vali_o = vali["flow"], vali["occupancy"]
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device)
    test_f, test_o = test["flow"], test["occupancy"]
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device)
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    # Single learning-rate decay step at epoch 150.
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    flow_std_squ = np.power(f_o_mean_std[1], 2)
    no_decrease = 0
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        time_s = time.time()
        print ("i_epoch: ", i)
        n_train = len(train["flow"])
        # Reshuffle sample order each epoch.
        # BUG FIX: the original called random.shuffle(number_list, random=r);
        # the 'random' parameter was deprecated in Python 3.9 and removed in
        # 3.11. Plain shuffle uses the same seeded global generator, so runs
        # remain reproducible. (list(range(...)) already copies, so the
        # redundant copy.copy wrapper was dropped too.)
        number_list = list(range(n_train))
        random.shuffle(number_list)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx]
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ)
        opt_scheduler.step()
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        #evaluate on both held-out splits every epoch
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        # Persist the metric history every epoch so an interrupted run still
        # leaves results on disk. (FIX: use a context manager instead of the
        # original open/dump/close so the handle closes even if dump raises.)
        performance = {"train": total_phy_flow_occ_loss,
                       "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],
                       "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        with open(file_name + '/' + 'performance'+'.json','w') as subfile:
            json.dump(performance, subfile)
        #early stop on the sum of normalized validation MSEs
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        # Track the minimum seen so far; seeded with a large sentinel on the
        # first epoch (n_mse_flow_occ counts completed epochs).
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            min_until_now = 1000000.0
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model
        n_mse_flow_occ = n_mse_flow_occ + 1
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model
def tensorize(train_vali_test):
    """Convert the four data fields of a split dict from nested lists to tensors.

    Returns a new dict with keys flow, flow_mask, occupancy, occupancy_mask.
    """
    fields = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {field: torch.tensor(train_vali_test[field]) for field in fields}
def normalize_flow_occ(tvt, f_o_mean_std): #tvt: train, vali, test
    """Z-normalize flow and occupancy fields (and their masked variants) in place.

    f_o_mean_std is [f_mean, f_std, o_mean, o_std]; flow fields use the flow
    statistics, occupancy fields the occupancy statistics. Returns tvt.
    """
    f_mean, f_std, o_mean, o_std = f_o_mean_std
    plan = (("flow", f_mean, f_std),
            ("flow_mask", f_mean, f_std),
            ("occupancy", o_mean, o_std),
            ("occupancy_mask", o_mean, o_std))
    for key, mean, std in plan:
        tvt[key] = ((np.array(tvt[key]) - mean) / std).tolist()
    return tvt
def transform_distance(d_matrix):
    """Turn raw pairwise distances into Gaussian-kernel weights, in place.

    Each entry d becomes exp(-10000 * d^2 / sigma^2) with sigma the std of the
    whole matrix, so zero distance maps to 1 and large distances decay toward 0.
    The 10000 factor presumably rescales distance units — confirm upstream.
    Mutates and returns d_matrix.
    """
    sigma = np.std(d_matrix)
    sigma_square = sigma*sigma
    for row in d_matrix:
        for j, dist in enumerate(row):
            row[j] = np.exp(0.0-10000.0*dist*dist/sigma_square)
    return d_matrix
def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load, normalize and tensorize the train/vali/test splits.

    Args: six JSON file paths produced by the data-preparation step.

    Returns:
        (train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length):
        the three splits as dicts of tensors, the kernel-transformed
        adjacency as a float tensor on *device*, the sensor count, the
        [f_mean, f_std, o_mean, o_std] list, and per-sensor lengths.
    """
    def _read_json(path):
        # FIX: context manager closes the handle promptly; the original
        # json.load(open(path)) left files open until garbage collection.
        with open(path) as f:
            return json.load(f)

    mean_std = _read_json(mean_std_path)
    f_mean, f_std, o_mean, o_std =\
        mean_std["f_mean"], mean_std["f_std"], mean_std["o_mean"], mean_std["o_std"]
    f_o_mean_std = [f_mean, f_std, o_mean, o_std]
    train = _read_json(train_path)
    vali = _read_json(vali_path)
    test = _read_json(test_path)
    adj = _read_json(sensor_adj_path)["adj"]
    n_sensor = len(train["flow"][0])
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))
    adj = torch.tensor(transform_distance(adj), device=device).float()
    df_sensor_id = _read_json(sensor_id_path)
    # Build sensor_length ordered by sensor index: each df_sensor_id entry is
    # a list whose position 0 is the index and position 3 presumably the
    # sensor/loop length — confirm against the data-preparation step.
    sensor_length = [0.0 for i in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]
    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length
#6.1 load the data
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)
# Recorded run: loading took ~9.42 s.
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
# Recorded run: 1546 / 509 / 510 windows;
# f_o_mean_std ~ [426.015, 254.820, 0.18153, 0.18314].
# Build the physics-informed GNN on the device and train it end to end.
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)
cri = nn.MSELoss()
#6.2: train the model
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# NOTE: the multi-page per-epoch training log that followed this call was
# pasted notebook output, not Python source — it made the file a
# SyntaxError and has been removed. The same metrics are persisted to
# <file_name>/performance.json by train_process every epoch.
time for this epoch 35.94161319732666 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 45 # batch: 96 i_batch: 0.0 the loss for this batch: 0.21767148 flow loss 0.07416888 occ loss 0.110252954 time for this batch 0.24445414543151855 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20003831 flow loss 0.074296035 occ loss 0.10718791 time for this batch 0.2989950180053711 ---------------------------------- train loss for this epoch: 0.214334
time for this epoch 36.149083852767944 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 46 # batch: 96 i_batch: 0.0 the loss for this batch: 0.2047111 flow loss 0.07573789 occ loss 0.10929016 time for this batch 0.24228954315185547 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20431921 flow loss 0.07746237 occ loss 0.11293067 time for this batch 0.25021839141845703 ---------------------------------- train loss for this epoch: 0.200838
time for this epoch 34.93404197692871 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 47 # batch: 96 i_batch: 0.0 the loss for this batch: 0.21907227 flow loss 0.08033663 occ loss 0.12506995 time for this batch 0.26567983627319336 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22054695 flow loss 0.08543184 occ loss 0.12910184 time for this batch 0.2933344841003418 ---------------------------------- train loss for this epoch: 0.193623
time for this epoch 34.762622356414795 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 48 # batch: 96 i_batch: 0.0 the loss for this batch: 0.21236078 flow loss 0.07939019 occ loss 0.12742601 time for this batch 0.2582974433898926 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1787296 flow loss 0.07278452 occ loss 0.101983644 time for this batch 0.28963780403137207 ---------------------------------- train loss for this epoch: 0.189209
time for this epoch 36.1998028755188 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 49 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1976913 flow loss 0.07533162 occ loss 0.120719045 time for this batch 0.2443220615386963 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16140793 flow loss 0.062335096 occ loss 0.09640826 time for this batch 0.29781436920166016 ---------------------------------- train loss for this epoch: 0.185995
time for this epoch 35.63302159309387 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 50 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1826124 flow loss 0.07059575 occ loss 0.109497964 time for this batch 0.2772204875946045 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22364861 flow loss 0.078151934 occ loss 0.1444472 time for this batch 0.27010178565979004 ---------------------------------- train loss for this epoch: 0.184463
time for this epoch 34.803637742996216 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 51 # batch: 96 i_batch: 0.0 the loss for this batch: 0.20148373 flow loss 0.07690339 occ loss 0.12231535 time for this batch 0.2769303321838379 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19337524 flow loss 0.06918977 occ loss 0.12285019 time for this batch 0.28499841690063477 ---------------------------------- train loss for this epoch: 0.183613
time for this epoch 35.61232900619507 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 52 # batch: 96 i_batch: 0.0 the loss for this batch: 0.20657057 flow loss 0.07638951 occ loss 0.12892482 time for this batch 0.2516157627105713 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17418733 flow loss 0.07147722 occ loss 0.101738326 time for this batch 0.2968742847442627 ---------------------------------- train loss for this epoch: 0.184142
time for this epoch 35.028326749801636 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 53 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16335353 flow loss 0.060786285 occ loss 0.101527 time for this batch 0.261005163192749 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18930201 flow loss 0.07004531 occ loss 0.11829463 time for this batch 0.28554725646972656 ---------------------------------- train loss for this epoch: 0.183568
time for this epoch 36.13008451461792 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 54 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18587199 flow loss 0.06421802 occ loss 0.12081852 time for this batch 0.2596447467803955 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1590686 flow loss 0.07073853 occ loss 0.08768538 time for this batch 0.2879030704498291 ---------------------------------- train loss for this epoch: 0.181268
time for this epoch 35.76178431510925 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 55 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19883843 flow loss 0.076079406 occ loss 0.121598795 time for this batch 0.2644338607788086 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1849714 flow loss 0.07941136 occ loss 0.104452856 time for this batch 0.303638219833374 ---------------------------------- train loss for this epoch: 0.182368
time for this epoch 35.51430892944336 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 56 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14018427 flow loss 0.062171973 occ loss 0.075262174 time for this batch 0.2584266662597656 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17046145 flow loss 0.063576914 occ loss 0.10621622 time for this batch 0.3066260814666748 ---------------------------------- train loss for this epoch: 0.182586
time for this epoch 35.705058574676514 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 57 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1665741 flow loss 0.071154326 occ loss 0.09486615 time for this batch 0.2758338451385498 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16243358 flow loss 0.068643115 occ loss 0.092794806 time for this batch 0.314800500869751 ---------------------------------- train loss for this epoch: 0.180245
time for this epoch 36.31664681434631 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 58 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17250068 flow loss 0.06906291 occ loss 0.102777675 time for this batch 0.2737698554992676 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24282578 flow loss 0.0815114 occ loss 0.16064242 time for this batch 0.30222606658935547 ---------------------------------- train loss for this epoch: 0.180515
time for this epoch 35.302815198898315 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 59 # batch: 96 i_batch: 0.0 the loss for this batch: 0.20877694 flow loss 0.07987652 occ loss 0.12794197 time for this batch 0.2430739402770996 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17929704 flow loss 0.0748191 occ loss 0.103622526 time for this batch 0.2984952926635742 ---------------------------------- train loss for this epoch: 0.18122
time for this epoch 35.12420201301575 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 60 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19129547 flow loss 0.0754445 occ loss 0.11505101 time for this batch 0.26212501525878906 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16212957 flow loss 0.061395224 occ loss 0.09998418 time for this batch 0.2987229824066162 ---------------------------------- train loss for this epoch: 0.178272
time for this epoch 36.1417977809906 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 61 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17968763 flow loss 0.07329112 occ loss 0.10561969 time for this batch 0.25141024589538574 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2352563 flow loss 0.08203592 occ loss 0.15223072 time for this batch 0.29625630378723145 ---------------------------------- train loss for this epoch: 0.180814
time for this epoch 36.30387997627258 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 62 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16609463 flow loss 0.06716981 occ loss 0.09801696 time for this batch 0.2568655014038086 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17890894 flow loss 0.07816091 occ loss 0.099919416 time for this batch 0.30185556411743164 ---------------------------------- train loss for this epoch: 0.179834
time for this epoch 36.11385202407837 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 63 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18904693 flow loss 0.074148074 occ loss 0.11412722 time for this batch 0.22465109825134277 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18088545 flow loss 0.073364 occ loss 0.106486246 time for this batch 0.30543041229248047 ---------------------------------- train loss for this epoch: 0.181278
time for this epoch 36.46645951271057 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 64 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16378646 flow loss 0.06444696 occ loss 0.098823905 time for this batch 0.2521672248840332 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2022571 flow loss 0.0819796 occ loss 0.119549334 time for this batch 0.29840970039367676 ---------------------------------- train loss for this epoch: 0.179958
time for this epoch 36.30212140083313 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 65 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17393962 flow loss 0.065474406 occ loss 0.10736803 time for this batch 0.2595970630645752 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15900974 flow loss 0.06946608 occ loss 0.0886134 time for this batch 0.29216909408569336 ---------------------------------- train loss for this epoch: 0.177619
time for this epoch 36.53178572654724 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 66 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1555474 flow loss 0.06340043 occ loss 0.09121626 time for this batch 0.2494795322418213 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1547137 flow loss 0.06493128 occ loss 0.08897761 time for this batch 0.2768528461456299 ---------------------------------- train loss for this epoch: 0.177394
time for this epoch 34.96186017990112 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 67 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16221932 flow loss 0.061696123 occ loss 0.099805705 time for this batch 0.24313807487487793 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17618465 flow loss 0.0682056 occ loss 0.107261226 time for this batch 0.2983405590057373 ---------------------------------- train loss for this epoch: 0.176097
time for this epoch 36.046433448791504 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 68 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15187088 flow loss 0.05862864 occ loss 0.09247688 time for this batch 0.25472474098205566 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20847256 flow loss 0.07701848 occ loss 0.13086286 time for this batch 0.30264949798583984 ---------------------------------- train loss for this epoch: 0.178273
time for this epoch 36.27314329147339 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 69 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17417818 flow loss 0.07278713 occ loss 0.100539796 time for this batch 0.2670280933380127 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18541418 flow loss 0.068778165 occ loss 0.11589819 time for this batch 0.3021848201751709 ---------------------------------- train loss for this epoch: 0.178246
time for this epoch 35.856016397476196 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 70 # batch: 96 i_batch: 0.0 the loss for this batch: 0.21581231 flow loss 0.07632032 occ loss 0.13860479 time for this batch 0.2787656784057617 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18557203 flow loss 0.07006866 occ loss 0.11442894 time for this batch 0.2810988426208496 ---------------------------------- train loss for this epoch: 0.176841
time for this epoch 36.010557889938354 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 71 # batch: 96 i_batch: 0.0 the loss for this batch: 0.23150471 flow loss 0.0750489 occ loss 0.15562907 time for this batch 0.2546401023864746 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21260475 flow loss 0.08089239 occ loss 0.13069199 time for this batch 0.3048975467681885 ---------------------------------- train loss for this epoch: 0.176553
time for this epoch 36.047274112701416 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 72 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14073075 flow loss 0.06269498 occ loss 0.077376574 time for this batch 0.25598883628845215 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19906275 flow loss 0.07520309 occ loss 0.12275161 time for this batch 0.28632020950317383 ---------------------------------- train loss for this epoch: 0.178198
time for this epoch 36.027724504470825 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 73 # batch: 96 i_batch: 0.0 the loss for this batch: 0.12857373 flow loss 0.053558815 occ loss 0.07375705 time for this batch 0.25538039207458496 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14640312 flow loss 0.062266264 occ loss 0.08337094 time for this batch 0.29807186126708984 ---------------------------------- train loss for this epoch: 0.176748
time for this epoch 35.78542947769165 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 74 # batch: 96 i_batch: 0.0 the loss for this batch: 0.20540783 flow loss 0.07356361 occ loss 0.13089906 time for this batch 0.2588331699371338 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18364294 flow loss 0.06570559 occ loss 0.117073804 time for this batch 0.2921876907348633 ---------------------------------- train loss for this epoch: 0.175068
time for this epoch 35.899118185043335 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 75 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14309426 flow loss 0.062207323 occ loss 0.07974935 time for this batch 0.2660961151123047 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17353748 flow loss 0.06697091 occ loss 0.10580147 time for this batch 0.28576016426086426 ---------------------------------- train loss for this epoch: 0.175597
time for this epoch 35.56472587585449 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 76 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14805058 flow loss 0.05894515 occ loss 0.08843532 time for this batch 0.2700047492980957 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19052269 flow loss 0.0713113 occ loss 0.11819305 time for this batch 0.2916395664215088 ---------------------------------- train loss for this epoch: 0.174831
time for this epoch 35.850205421447754 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 77 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19912404 flow loss 0.07374568 occ loss 0.124759026 time for this batch 0.2669544219970703 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18986535 flow loss 0.07092313 occ loss 0.118022814 time for this batch 0.2703282833099365 ---------------------------------- train loss for this epoch: 0.175375
time for this epoch 35.58188247680664 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 78 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16870596 flow loss 0.06469554 occ loss 0.10319823 time for this batch 0.23950910568237305 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14497223 flow loss 0.05811251 occ loss 0.08560113 time for this batch 0.26145434379577637 ---------------------------------- train loss for this epoch: 0.175711
time for this epoch 35.62810683250427 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 79 # batch: 96 i_batch: 0.0 the loss for this batch: 0.22210418 flow loss 0.07783774 occ loss 0.14343894 time for this batch 0.23857855796813965 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19293754 flow loss 0.069531105 occ loss 0.12278404 time for this batch 0.3013112545013428 ---------------------------------- train loss for this epoch: 0.174162
time for this epoch 36.09221267700195 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 80 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19990097 flow loss 0.07042428 occ loss 0.12865004 time for this batch 0.24941492080688477 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19666727 flow loss 0.075448655 occ loss 0.12039861 time for this batch 0.2980198860168457 ---------------------------------- train loss for this epoch: 0.174453
time for this epoch 35.82158279418945 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 81 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17310116 flow loss 0.06615764 occ loss 0.1060185 time for this batch 0.2541351318359375 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20560594 flow loss 0.07974369 occ loss 0.12422366 time for this batch 0.30580711364746094 ---------------------------------- train loss for this epoch: 0.176544
time for this epoch 35.67205810546875 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 82 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16451256 flow loss 0.06395296 occ loss 0.09997208 time for this batch 0.2365109920501709 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1755137 flow loss 0.066751145 occ loss 0.107194304 time for this batch 0.27672529220581055 ---------------------------------- train loss for this epoch: 0.174006
time for this epoch 35.319366693496704 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 83 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18455452 flow loss 0.07175092 occ loss 0.111757845 time for this batch 0.2568793296813965 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1818819 flow loss 0.06571149 occ loss 0.11519695 time for this batch 0.2887842655181885 ---------------------------------- train loss for this epoch: 0.173423
time for this epoch 36.22359919548035 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 84 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16761215 flow loss 0.06283234 occ loss 0.104054235 time for this batch 0.2525951862335205 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15858328 flow loss 0.064277925 occ loss 0.09339289 time for this batch 0.2982490062713623 ---------------------------------- train loss for this epoch: 0.17653
time for this epoch 36.12782144546509 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 85 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14822574 flow loss 0.066589184 occ loss 0.08093734 time for this batch 0.2663259506225586 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17947254 flow loss 0.07253623 occ loss 0.10601501 time for this batch 0.3018186092376709 ---------------------------------- train loss for this epoch: 0.173782
time for this epoch 35.41782283782959 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 86 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16392288 flow loss 0.062346783 occ loss 0.10083972 time for this batch 0.2701094150543213 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14363073 flow loss 0.05962032 occ loss 0.083401255 time for this batch 0.2984335422515869 ---------------------------------- train loss for this epoch: 0.174515
time for this epoch 35.94626045227051 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 87 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16845307 flow loss 0.06861934 occ loss 0.09869199 time for this batch 0.26525330543518066 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17226888 flow loss 0.0699509 occ loss 0.10126868 time for this batch 0.2998464107513428 ---------------------------------- train loss for this epoch: 0.172742
time for this epoch 35.28374361991882 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 88 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17036267 flow loss 0.06874661 occ loss 0.10051472 time for this batch 0.27605175971984863 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1921688 flow loss 0.07044844 occ loss 0.12089019 time for this batch 0.28609585762023926 ---------------------------------- train loss for this epoch: 0.172419
time for this epoch 35.75591683387756 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 89 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1437006 flow loss 0.066267565 occ loss 0.07631171 time for this batch 0.2639925479888916 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18563831 flow loss 0.069431394 occ loss 0.11537125 time for this batch 0.27533793449401855 ---------------------------------- train loss for this epoch: 0.171766
time for this epoch 34.9286367893219 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 90 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1899916 flow loss 0.066792786 occ loss 0.12183957 time for this batch 0.26248764991760254 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17338726 flow loss 0.06592671 occ loss 0.10666962 time for this batch 0.2870357036590576 ---------------------------------- train loss for this epoch: 0.172418
time for this epoch 35.84917235374451 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 91 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1669523 flow loss 0.06796462 occ loss 0.09804562 time for this batch 0.2639782428741455 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17971894 flow loss 0.06703968 occ loss 0.111628935 time for this batch 0.3011813163757324 ---------------------------------- train loss for this epoch: 0.172649
time for this epoch 36.00700616836548 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 92 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13031538 flow loss 0.055755705 occ loss 0.07358056 time for this batch 0.2678096294403076 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18614526 flow loss 0.06694396 occ loss 0.11861219 time for this batch 0.30106234550476074 ---------------------------------- train loss for this epoch: 0.170485
time for this epoch 35.92256283760071 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 93 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17399646 flow loss 0.0704782 occ loss 0.10285781 time for this batch 0.26882338523864746 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14654066 flow loss 0.06561657 occ loss 0.07990454 time for this batch 0.26110100746154785 ---------------------------------- train loss for this epoch: 0.171181
time for this epoch 35.136146545410156 No_decrease: 11 ----------------an epoch starts------------------- i_epoch: 94 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17925906 flow loss 0.06961416 occ loss 0.10907468 time for this batch 0.26853108406066895 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16175982 flow loss 0.06334969 occ loss 0.09751119 time for this batch 0.2962038516998291 ---------------------------------- train loss for this epoch: 0.172813
time for this epoch 35.84126567840576 No_decrease: 12 ----------------an epoch starts------------------- i_epoch: 95 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13919745 flow loss 0.06252044 occ loss 0.07566107 time for this batch 0.265622615814209 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17003743 flow loss 0.06402331 occ loss 0.10473329 time for this batch 0.2732689380645752 ---------------------------------- train loss for this epoch: 0.170329
time for this epoch 34.65124297142029 No_decrease: 13 ----------------an epoch starts------------------- i_epoch: 96 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1824263 flow loss 0.067468 occ loss 0.11392409 time for this batch 0.24829959869384766 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15472004 flow loss 0.06327815 occ loss 0.0906212 time for this batch 0.25166773796081543 ---------------------------------- train loss for this epoch: 0.172215
time for this epoch 33.35471439361572 No_decrease: 14 ----------------an epoch starts------------------- i_epoch: 97 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15053113 flow loss 0.06430782 occ loss 0.08565785 time for this batch 0.26958131790161133 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13777101 flow loss 0.057096854 occ loss 0.079850085 time for this batch 0.2913544178009033 ---------------------------------- train loss for this epoch: 0.171034
time for this epoch 35.63239336013794 No_decrease: 15 ----------------an epoch starts------------------- i_epoch: 98 # batch: 96 i_batch: 0.0 the loss for this batch: 0.22729017 flow loss 0.07047774 occ loss 0.15584037 time for this batch 0.25759220123291016 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1784747 flow loss 0.070728585 occ loss 0.10641592 time for this batch 0.28887271881103516 ---------------------------------- train loss for this epoch: 0.17117
time for this epoch 35.95522618293762 No_decrease: 16 ----------------an epoch starts------------------- i_epoch: 99 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19365247 flow loss 0.0773244 occ loss 0.11559213 time for this batch 0.2680821418762207 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19511029 flow loss 0.072318144 occ loss 0.121697575 time for this batch 0.3059711456298828 ---------------------------------- train loss for this epoch: 0.17114
time for this epoch 36.03052806854248 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 100 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18382917 flow loss 0.07142087 occ loss 0.111550584 time for this batch 0.2662816047668457 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14682268 flow loss 0.05985432 occ loss 0.08621209 time for this batch 0.29518604278564453 ---------------------------------- train loss for this epoch: 0.171666
time for this epoch 35.27071404457092 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 101 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13862193 flow loss 0.05571521 occ loss 0.08208578 time for this batch 0.27253127098083496 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19145031 flow loss 0.06771027 occ loss 0.12308605 time for this batch 0.2843635082244873 ---------------------------------- train loss for this epoch: 0.169693
time for this epoch 36.302613496780396 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 102 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19748104 flow loss 0.071220726 occ loss 0.12530187 time for this batch 0.23814082145690918 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14927016 flow loss 0.059995897 occ loss 0.08863657 time for this batch 0.2110915184020996 ---------------------------------- train loss for this epoch: 0.168606
time for this epoch 35.499664068222046 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 103 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14776675 flow loss 0.05654737 occ loss 0.090597115 time for this batch 0.25837254524230957 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15584572 flow loss 0.0700638 occ loss 0.08521566 time for this batch 0.28821682929992676 ---------------------------------- train loss for this epoch: 0.170635
time for this epoch 35.85975956916809 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 104 # batch: 96 i_batch: 0.0 the loss for this batch: 0.20048954 flow loss 0.07721583 occ loss 0.12150759 time for this batch 0.24973273277282715 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16130173 flow loss 0.06123617 occ loss 0.099188745 time for this batch 0.2896862030029297 ---------------------------------- train loss for this epoch: 0.168797
time for this epoch 35.19462060928345 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 105 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16971557 flow loss 0.065887526 occ loss 0.102793045 time for this batch 0.2492680549621582 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18537907 flow loss 0.066772915 occ loss 0.11778651 time for this batch 0.29853081703186035 ---------------------------------- train loss for this epoch: 0.172405
time for this epoch 35.25805687904358 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 106 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18608963 flow loss 0.065567374 occ loss 0.119753346 time for this batch 0.25777268409729004 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14583428 flow loss 0.060112637 occ loss 0.08491774 time for this batch 0.290330171585083 ---------------------------------- train loss for this epoch: 0.169008
time for this epoch 35.82243514060974 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 107 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15132196 flow loss 0.059302967 occ loss 0.09086312 time for this batch 0.2657811641693115 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19752368 flow loss 0.06871441 occ loss 0.1280359 time for this batch 0.30043840408325195 ---------------------------------- train loss for this epoch: 0.168041
time for this epoch 36.217986822128296 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 108 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14523916 flow loss 0.05943706 occ loss 0.085010126 time for this batch 0.2522103786468506 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14921159 flow loss 0.06480461 occ loss 0.0832153 time for this batch 0.2931358814239502 ---------------------------------- train loss for this epoch: 0.168551
time for this epoch 36.54414439201355 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 109 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18801223 flow loss 0.0755666 occ loss 0.11156944 time for this batch 0.2644948959350586 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17133515 flow loss 0.06323689 occ loss 0.10718493 time for this batch 0.29678964614868164 ---------------------------------- train loss for this epoch: 0.169542
time for this epoch 36.99984073638916 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 110 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18474686 flow loss 0.06602886 occ loss 0.11794116 time for this batch 0.24725794792175293 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14282207 flow loss 0.060438827 occ loss 0.08176793 time for this batch 0.29830169677734375 ---------------------------------- train loss for this epoch: 0.168295
time for this epoch 35.71513485908508 No_decrease: 11 ----------------an epoch starts------------------- i_epoch: 111 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17751063 flow loss 0.07513848 occ loss 0.101702005 time for this batch 0.26201438903808594 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19898699 flow loss 0.070259884 occ loss 0.12775458 time for this batch 0.3097550868988037 ---------------------------------- train loss for this epoch: 0.168511
time for this epoch 35.73501110076904 No_decrease: 12 ----------------an epoch starts------------------- i_epoch: 112 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18094252 flow loss 0.064714424 occ loss 0.11537253 time for this batch 0.2692084312438965 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15326537 flow loss 0.06480347 occ loss 0.0878365 time for this batch 0.2810053825378418 ---------------------------------- train loss for this epoch: 0.167722
time for this epoch 36.388078927993774 No_decrease: 13 ----------------an epoch starts------------------- i_epoch: 113 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18841113 flow loss 0.069949426 occ loss 0.11775675 time for this batch 0.2638967037200928 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1506983 flow loss 0.06491655 occ loss 0.08487445 time for this batch 0.3000068664550781 ---------------------------------- train loss for this epoch: 0.168696
time for this epoch 36.22943043708801 No_decrease: 14 ----------------an epoch starts------------------- i_epoch: 114 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1513195 flow loss 0.061665587 occ loss 0.08919769 time for this batch 0.2660079002380371 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14817843 flow loss 0.05838098 occ loss 0.08905777 time for this batch 0.3106727600097656 ---------------------------------- train loss for this epoch: 0.167337
time for this epoch 36.21411466598511 No_decrease: 15 ----------------an epoch starts------------------- i_epoch: 115 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17068912 flow loss 0.065765105 occ loss 0.104191154 time for this batch 0.2539398670196533 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.179992 flow loss 0.06274154 occ loss 0.116555765 time for this batch 0.2882530689239502 ---------------------------------- train loss for this epoch: 0.167658
time for this epoch 36.06180429458618 No_decrease: 16 ----------------an epoch starts------------------- i_epoch: 116 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16078553 flow loss 0.062046073 occ loss 0.09775417 time for this batch 0.25798892974853516 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13351859 flow loss 0.058307458 occ loss 0.07422486 time for this batch 0.23889732360839844 ---------------------------------- train loss for this epoch: 0.166971
time for this epoch 35.747259855270386 No_decrease: 17 ----------------an epoch starts------------------- i_epoch: 117 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19687915 flow loss 0.06595999 occ loss 0.1299097 time for this batch 0.2570075988769531 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19140714 flow loss 0.06877185 occ loss 0.12182631 time for this batch 0.2782907485961914 ---------------------------------- train loss for this epoch: 0.166952
time for this epoch 35.54017734527588 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 118 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15992452 flow loss 0.06467429 occ loss 0.09442791 time for this batch 0.2734537124633789 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17479111 flow loss 0.06155282 occ loss 0.11190727 time for this batch 0.3027524948120117 ---------------------------------- train loss for this epoch: 0.167984
time for this epoch 35.28569555282593 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 119 # batch: 96 i_batch: 0.0 the loss for this batch: 0.21105213 flow loss 0.07216683 occ loss 0.13797064 time for this batch 0.25921201705932617 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15090664 flow loss 0.056996893 occ loss 0.09259641 time for this batch 0.29767537117004395 ---------------------------------- train loss for this epoch: 0.166573
time for this epoch 36.52397584915161 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 120 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18526545 flow loss 0.06808173 occ loss 0.11605248 time for this batch 0.2691330909729004 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14845903 flow loss 0.05554735 occ loss 0.09249969 time for this batch 0.2902567386627197 ---------------------------------- train loss for this epoch: 0.165953
time for this epoch 36.09651708602905 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 121 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15452021 flow loss 0.05586844 occ loss 0.097964615 time for this batch 0.26357483863830566 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18051723 flow loss 0.064815454 occ loss 0.11482169 time for this batch 0.3009369373321533 ---------------------------------- train loss for this epoch: 0.165767
time for this epoch 36.065746545791626 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 122 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18440987 flow loss 0.06527475 occ loss 0.11754483 time for this batch 0.2586243152618408 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19673747 flow loss 0.07269813 occ loss 0.12275551 time for this batch 0.30745935440063477 ---------------------------------- train loss for this epoch: 0.166726
time for this epoch 35.79621505737305 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 123 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1756933 flow loss 0.06474559 occ loss 0.110019684 time for this batch 0.26284050941467285 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16327827 flow loss 0.065372735 occ loss 0.09713858 time for this batch 0.26116037368774414 ---------------------------------- train loss for this epoch: 0.16686
time for this epoch 36.291017293930054 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 124 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13908443 flow loss 0.056497842 occ loss 0.08153422 time for this batch 0.2691831588745117 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20510605 flow loss 0.076941825 occ loss 0.12755738 time for this batch 0.3001992702484131 ---------------------------------- train loss for this epoch: 0.166072
time for this epoch 35.80522918701172 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 125 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19285603 flow loss 0.062092416 occ loss 0.13007852 time for this batch 0.26743173599243164 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17719546 flow loss 0.06736508 occ loss 0.10915974 time for this batch 0.2804434299468994 ---------------------------------- train loss for this epoch: 0.168625
time for this epoch 35.78720760345459 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 126 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13736126 flow loss 0.06473019 occ loss 0.07148071 time for this batch 0.2519512176513672 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15064935 flow loss 0.05813848 occ loss 0.09185129 time for this batch 0.29706454277038574 ---------------------------------- train loss for this epoch: 0.166519
time for this epoch 35.94016718864441 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 127 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17597789 flow loss 0.06695326 occ loss 0.10811217 time for this batch 0.24572300910949707 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16476296 flow loss 0.059370767 occ loss 0.104167685 time for this batch 0.3047666549682617 ---------------------------------- train loss for this epoch: 0.167058
time for this epoch 36.441943645477295 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 128 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15974542 flow loss 0.061560277 occ loss 0.09747472 time for this batch 0.2700495719909668 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17378312 flow loss 0.061568044 occ loss 0.11136644 time for this batch 0.3208801746368408 ---------------------------------- train loss for this epoch: 0.165275
time for this epoch 35.56499886512756 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 129 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14911258 flow loss 0.060733285 occ loss 0.08722798 time for this batch 0.29329442977905273 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16176897 flow loss 0.06487979 occ loss 0.09624905 time for this batch 0.299100399017334 ---------------------------------- train loss for this epoch: 0.164976
time for this epoch 36.05239772796631 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 130 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16111392 flow loss 0.05791893 occ loss 0.1026149 time for this batch 0.2617664337158203 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18926121 flow loss 0.066344455 occ loss 0.12215179 time for this batch 0.2985191345214844 ---------------------------------- train loss for this epoch: 0.164508
time for this epoch 34.91592335700989 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 131 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17399368 flow loss 0.06280025 occ loss 0.110477746 time for this batch 0.2521655559539795 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.12310291 flow loss 0.053632878 occ loss 0.06887927 time for this batch 0.3116931915283203 ---------------------------------- train loss for this epoch: 0.165528
time for this epoch 36.113171339035034 No_decrease: 11 ----------------an epoch starts------------------- i_epoch: 132 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19262879 flow loss 0.07276632 occ loss 0.11890479 time for this batch 0.2547125816345215 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19775216 flow loss 0.070159175 occ loss 0.12678385 time for this batch 0.27264952659606934 ---------------------------------- train loss for this epoch: 0.165681
time for this epoch 35.66111731529236 No_decrease: 12 ----------------an epoch starts------------------- i_epoch: 133 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14820036 flow loss 0.057528228 occ loss 0.089986004 time for this batch 0.22443127632141113 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14508349 flow loss 0.063489065 occ loss 0.08078793 time for this batch 0.2763831615447998 ---------------------------------- train loss for this epoch: 0.16799
time for this epoch 35.596271991729736 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 134 # batch: 96 i_batch: 0.0 the loss for this batch: 0.20301802 flow loss 0.07096529 occ loss 0.13122286 time for this batch 0.26012325286865234 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16000837 flow loss 0.064371645 occ loss 0.094972245 time for this batch 0.308544397354126 ---------------------------------- train loss for this epoch: 0.163128
time for this epoch 35.961509227752686 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 135 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14701864 flow loss 0.062383514 occ loss 0.083751686 time for this batch 0.24263739585876465 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17839578 flow loss 0.061300468 occ loss 0.11613542 time for this batch 0.29031896591186523 ---------------------------------- train loss for this epoch: 0.162938
time for this epoch 36.55561661720276 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 136 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15910903 flow loss 0.065040104 occ loss 0.09348571 time for this batch 0.2041635513305664 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19327112 flow loss 0.067380995 occ loss 0.1251332 time for this batch 0.2885456085205078 ---------------------------------- train loss for this epoch: 0.164777
time for this epoch 36.116127252578735 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 137 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18838324 flow loss 0.06526476 occ loss 0.12208693 time for this batch 0.25025320053100586 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16050997 flow loss 0.05995745 occ loss 0.09965484 time for this batch 0.2805347442626953 ---------------------------------- train loss for this epoch: 0.166455
time for this epoch 36.00494861602783 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 138 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13365744 flow loss 0.057081107 occ loss 0.0759894 time for this batch 0.26805925369262695 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17442197 flow loss 0.065109305 occ loss 0.10826822 time for this batch 0.29799890518188477 ---------------------------------- train loss for this epoch: 0.162944
time for this epoch 36.234371185302734 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 139 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14453746 flow loss 0.05975117 occ loss 0.08396948 time for this batch 0.2545480728149414 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18015681 flow loss 0.06515313 occ loss 0.11424621 time for this batch 0.2914907932281494 ---------------------------------- train loss for this epoch: 0.164454
time for this epoch 35.85138177871704 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 140 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14195777 flow loss 0.059396025 occ loss 0.0814118 time for this batch 0.2241818904876709 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.12662634 flow loss 0.05306501 occ loss 0.07276199 time for this batch 0.295123815536499 ---------------------------------- train loss for this epoch: 0.164236
time for this epoch 36.283220052719116 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 141 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16138643 flow loss 0.061501015 occ loss 0.09922265 time for this batch 0.26515865325927734 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16881907 flow loss 0.06487782 occ loss 0.10320298 time for this batch 0.2851681709289551 ---------------------------------- train loss for this epoch: 0.162434
time for this epoch 36.55703520774841 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 142 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1547023 flow loss 0.058712777 occ loss 0.095323026 time for this batch 0.25592827796936035 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18592538 flow loss 0.06794815 occ loss 0.116724014 time for this batch 0.2920231819152832 ---------------------------------- train loss for this epoch: 0.164301
time for this epoch 36.09674620628357 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 143 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1450916 flow loss 0.059892073 occ loss 0.08434885 time for this batch 0.25765037536621094 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17056878 flow loss 0.06432403 occ loss 0.105539754 time for this batch 0.29903268814086914 ---------------------------------- train loss for this epoch: 0.162526
time for this epoch 36.021533489227295 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 144 # batch: 96 i_batch: 0.0 the loss for this batch: 0.21400306 flow loss 0.068346344 occ loss 0.14511472 time for this batch 0.2710764408111572 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1588481 flow loss 0.06310648 occ loss 0.09495549 time for this batch 0.30779075622558594 ---------------------------------- train loss for this epoch: 0.162888
time for this epoch 36.1134819984436 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 145 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18575923 flow loss 0.06331387 occ loss 0.121992365 time for this batch 0.25138044357299805 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16570902 flow loss 0.06108936 occ loss 0.10402548 time for this batch 0.2991189956665039 ---------------------------------- train loss for this epoch: 0.164397
time for this epoch 35.776241064071655 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 146 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13487156 flow loss 0.055854507 occ loss 0.077206254 time for this batch 0.21877503395080566 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14876162 flow loss 0.05866957 occ loss 0.08909258 time for this batch 0.300675630569458 ---------------------------------- train loss for this epoch: 0.162583
time for this epoch 36.34780502319336 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 147 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1532793 flow loss 0.057343688 occ loss 0.09507453 time for this batch 0.20897412300109863 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1945971 flow loss 0.06833434 occ loss 0.12564114 time for this batch 0.30044007301330566 ---------------------------------- train loss for this epoch: 0.162201
time for this epoch 36.418190002441406 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 148 # batch: 96 i_batch: 0.0 the loss for this batch: 0.19601624 flow loss 0.06716528 occ loss 0.12820274 time for this batch 0.25798797607421875 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16467153 flow loss 0.06356262 occ loss 0.10040433 time for this batch 0.29102063179016113 ---------------------------------- train loss for this epoch: 0.163255
time for this epoch 36.58564519882202 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 149 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15211977 flow loss 0.056989487 occ loss 0.09447543 time for this batch 0.2594292163848877 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.12770762 flow loss 0.054798875 occ loss 0.072361134 time for this batch 0.29372477531433105 ---------------------------------- train loss for this epoch: 0.16273
time for this epoch 36.15956139564514 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 150 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13788454 flow loss 0.06379525 occ loss 0.07346133 time for this batch 0.26778626441955566 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16940837 flow loss 0.05783142 occ loss 0.11073641 time for this batch 0.2682514190673828 ---------------------------------- train loss for this epoch: 0.15662
time for this epoch 36.560269594192505 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 151 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13520505 flow loss 0.05599338 occ loss 0.078519255 time for this batch 0.2735929489135742 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16274416 flow loss 0.061264355 occ loss 0.10076438 time for this batch 0.27887630462646484 ---------------------------------- train loss for this epoch: 0.153491
time for this epoch 36.28751468658447 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 152 # batch: 96 i_batch: 0.0 the loss for this batch: 0.12726319 flow loss 0.05173375 occ loss 0.07484029 time for this batch 0.26604270935058594 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17634547 flow loss 0.06100433 occ loss 0.114343 time for this batch 0.296708345413208 ---------------------------------- train loss for this epoch: 0.153264
time for this epoch 36.4266197681427 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 153 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15570161 flow loss 0.056850456 occ loss 0.09804322 time for this batch 0.22124457359313965 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16447847 flow loss 0.0605166 occ loss 0.10267027 time for this batch 0.29894256591796875 ---------------------------------- train loss for this epoch: 0.152769
time for this epoch 35.909162759780884 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 154 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14046271 flow loss 0.057771005 occ loss 0.081953324 time for this batch 0.27202844619750977 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16099103 flow loss 0.055576876 occ loss 0.10485298 time for this batch 0.2832646369934082 ---------------------------------- train loss for this epoch: 0.152905
time for this epoch 35.63705587387085 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 155 # batch: 96 i_batch: 0.0 the loss for this batch: 0.10214542 flow loss 0.04948039 occ loss 0.05209579 time for this batch 0.2649726867675781 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1633307 flow loss 0.059093297 occ loss 0.10349911 time for this batch 0.29421544075012207 ---------------------------------- train loss for this epoch: 0.152138
time for this epoch 35.542768478393555 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 156 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16001342 flow loss 0.058796212 occ loss 0.100387745 time for this batch 0.266587495803833 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1663393 flow loss 0.056855183 occ loss 0.10882389 time for this batch 0.3000004291534424 ---------------------------------- train loss for this epoch: 0.151996
time for this epoch 35.83282160758972 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 157 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15057373 flow loss 0.054906603 occ loss 0.094982624 time for this batch 0.2648301124572754 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20232894 flow loss 0.06693005 occ loss 0.1347382 time for this batch 0.2764742374420166 ---------------------------------- train loss for this epoch: 0.151977
time for this epoch 36.30727672576904 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 158 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15180667 flow loss 0.056319684 occ loss 0.09482578 time for this batch 0.2681007385253906 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13946186 flow loss 0.056474905 occ loss 0.08218024 time for this batch 0.30027008056640625 ---------------------------------- train loss for this epoch: 0.151813
time for this epoch 35.29623460769653 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 159 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16296007 flow loss 0.057837028 occ loss 0.10442531 time for this batch 0.2708306312561035 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13768955 flow loss 0.05264592 occ loss 0.08440288 time for this batch 0.3014228343963623 ---------------------------------- train loss for this epoch: 0.151632
time for this epoch 35.6601722240448 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 160 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1406896 flow loss 0.05565457 occ loss 0.083998576 time for this batch 0.2620675563812256 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17966387 flow loss 0.06071053 occ loss 0.11832286 time for this batch 0.3087944984436035 ---------------------------------- train loss for this epoch: 0.151575
time for this epoch 35.05521368980408 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 161 # batch: 96 i_batch: 0.0 the loss for this batch: 0.12369535 flow loss 0.05111572 occ loss 0.071599156 time for this batch 0.2744259834289551 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14398728 flow loss 0.0539183 occ loss 0.08959059 time for this batch 0.2773172855377197 ---------------------------------- train loss for this epoch: 0.152055
time for this epoch 36.03665375709534 No_decrease: 11 ----------------an epoch starts------------------- i_epoch: 162 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13523693 flow loss 0.052544214 occ loss 0.082028404 time for this batch 0.27314162254333496 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14475445 flow loss 0.055983353 occ loss 0.08804973 time for this batch 0.30843091011047363 ---------------------------------- train loss for this epoch: 0.151681
time for this epoch 36.689247369766235 No_decrease: 12 ----------------an epoch starts------------------- i_epoch: 163 # batch: 96 i_batch: 0.0 the loss for this batch: 0.12454147 flow loss 0.051096443 occ loss 0.0728236 time for this batch 0.2681999206542969 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15612128 flow loss 0.05755154 occ loss 0.09796028 time for this batch 0.2992880344390869 ---------------------------------- train loss for this epoch: 0.151599
time for this epoch 36.065070390701294 No_decrease: 13 ----------------an epoch starts------------------- i_epoch: 164 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16282469 flow loss 0.05785704 occ loss 0.10447369 time for this batch 0.26404762268066406 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16498072 flow loss 0.060133424 occ loss 0.10386911 time for this batch 0.29967761039733887 ---------------------------------- train loss for this epoch: 0.15153
time for this epoch 35.42187261581421 No_decrease: 14 ----------------an epoch starts------------------- i_epoch: 165 # batch: 96 i_batch: 0.0 the loss for this batch: 0.12908065 flow loss 0.052725412 occ loss 0.075535566 time for this batch 0.262225866317749 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15927891 flow loss 0.058527928 occ loss 0.10011309 time for this batch 0.2968783378601074 ---------------------------------- train loss for this epoch: 0.151535
time for this epoch 36.28017783164978 No_decrease: 15 ----------------an epoch starts------------------- i_epoch: 166 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16636567 flow loss 0.056342505 occ loss 0.1093123 time for this batch 0.27455663681030273 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19557306 flow loss 0.06426797 occ loss 0.13063443 time for this batch 0.30594515800476074 ---------------------------------- train loss for this epoch: 0.151632
time for this epoch 34.537835121154785 No_decrease: 16 ----------------an epoch starts------------------- i_epoch: 167 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16845353 flow loss 0.055725556 occ loss 0.11194516 time for this batch 0.25351381301879883 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15966389 flow loss 0.05474414 occ loss 0.10411117 time for this batch 0.2638378143310547 ---------------------------------- train loss for this epoch: 0.151117
time for this epoch 35.91218066215515 No_decrease: 17 ----------------an epoch starts------------------- i_epoch: 168 # batch: 96 i_batch: 0.0 the loss for this batch: 0.10159688 flow loss 0.045155793 occ loss 0.055603415 time for this batch 0.21937203407287598 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13584958 flow loss 0.052537255 occ loss 0.082549885 time for this batch 0.30303049087524414 ---------------------------------- train loss for this epoch: 0.151119
time for this epoch 35.270124197006226 No_decrease: 18 ----------------an epoch starts------------------- i_epoch: 169 # batch: 96 i_batch: 0.0 the loss for this batch: 0.1310682 flow loss 0.05327877 occ loss 0.07710029 time for this batch 0.23630809783935547 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16685642 flow loss 0.06219238 occ loss 0.10412561 time for this batch 0.2972850799560547 ---------------------------------- train loss for this epoch: 0.15121
time for this epoch 36.329983949661255 No_decrease: 19 ----------------an epoch starts------------------- i_epoch: 170 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14618158 flow loss 0.054661352 occ loss 0.09064104 time for this batch 0.2447960376739502 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13900916 flow loss 0.05394424 occ loss 0.08416267 time for this batch 0.25426769256591797 ---------------------------------- train loss for this epoch: 0.151134
time for this epoch 33.09652829170227 No_decrease: 20 ----------------an epoch starts------------------- i_epoch: 171 # batch: 96 i_batch: 0.0 the loss for this batch: 0.14672814 flow loss 0.05546782 occ loss 0.09065321 time for this batch 0.28752708435058594 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.14392416 flow loss 0.053741593 occ loss 0.08963703 time for this batch 0.29972314834594727 ---------------------------------- train loss for this epoch: 0.15115
time for this epoch 35.96700930595398 No_decrease: 21 ----------------an epoch starts------------------- i_epoch: 172 # batch: 96 i_batch: 0.0 the loss for this batch: 0.12118472 flow loss 0.051680434 occ loss 0.06879913 time for this batch 0.26595091819763184 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15967552 flow loss 0.05583629 occ loss 0.10305835 time for this batch 0.30368781089782715 ---------------------------------- train loss for this epoch: 0.150999
time for this epoch 35.658263206481934 No_decrease: 22 ----------------an epoch starts------------------- i_epoch: 173 # batch: 96 i_batch: 0.0 the loss for this batch: 0.15796532 flow loss 0.054670565 occ loss 0.1026516 time for this batch 0.27045655250549316 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17312184 flow loss 0.057314035 occ loss 0.11536534 time for this batch 0.3030970096588135 ---------------------------------- train loss for this epoch: 0.150938
time for this epoch 35.85626769065857 No_decrease: 23 ----------------an epoch starts------------------- i_epoch: 174 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16659436 flow loss 0.058670245 occ loss 0.10717036 time for this batch 0.2614014148712158 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13567214 flow loss 0.04927437 occ loss 0.08596507 time for this batch 0.30046916007995605 ---------------------------------- train loss for this epoch: 0.150973
time for this epoch 36.26486897468567 No_decrease: 24 ----------------an epoch starts------------------- i_epoch: 175 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13541023 flow loss 0.046839867 occ loss 0.08806056 time for this batch 0.24441242218017578 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13458592 flow loss 0.05070469 occ loss 0.08329645 time for this batch 0.299288272857666 ---------------------------------- train loss for this epoch: 0.15094
time for this epoch 35.81133222579956 No_decrease: 25 ----------------an epoch starts------------------- i_epoch: 176 # batch: 96 i_batch: 0.0 the loss for this batch: 0.17525609 flow loss 0.059762873 occ loss 0.11480266 time for this batch 0.26750683784484863 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1493042 flow loss 0.057168875 occ loss 0.09157919 time for this batch 0.2724144458770752 ---------------------------------- train loss for this epoch: 0.150742
time for this epoch 35.885584354400635 No_decrease: 26 ----------------an epoch starts------------------- i_epoch: 177 # batch: 96 i_batch: 0.0 the loss for this batch: 0.11722743 flow loss 0.04808286 occ loss 0.06877148 time for this batch 0.26015424728393555 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15029721 flow loss 0.055439692 occ loss 0.09405122 time for this batch 0.29036831855773926 ---------------------------------- train loss for this epoch: 0.150929
time for this epoch 35.31740617752075 No_decrease: 27 ----------------an epoch starts------------------- i_epoch: 178 # batch: 96 i_batch: 0.0 the loss for this batch: 0.16793163 flow loss 0.06142454 occ loss 0.10597673 time for this batch 0.2633180618286133 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18033652 flow loss 0.061378263 occ loss 0.11846303 time for this batch 0.2859475612640381 ---------------------------------- train loss for this epoch: 0.151056
time for this epoch 35.67820692062378 No_decrease: 28 ----------------an epoch starts------------------- i_epoch: 179 # batch: 96 i_batch: 0.0 the loss for this batch: 0.13348942 flow loss 0.053044997 occ loss 0.08000504 time for this batch 0.25675511360168457 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.116783105 flow loss 0.046292037 occ loss 0.070004895 time for this batch 0.262432336807251 ---------------------------------- train loss for this epoch: 0.150631
time for this epoch 35.55372667312622 No_decrease: 29 ----------------an epoch starts------------------- i_epoch: 180 # batch: 96 i_batch: 0.0 the loss for this batch: 0.18282905 flow loss 0.06148705 occ loss 0.12053191 time for this batch 0.2301464080810547 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13023412 flow loss 0.04752938 occ loss 0.08212631 time for this batch 0.29033350944519043 ---------------------------------- train loss for this epoch: 0.150831
time for this epoch 35.47727584838867 Early stop at the 181-th epoch
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on one validation/test split and print its metrics.

    Parameters
    ----------
    model : the trained NMFD_GNN model to evaluate.
    vt : dict holding the split's tensors under keys "flow", "flow_mask",
        "occupancy", "occupancy_mask" (masks are moved to `device` here;
        presumably `vali_test` handles the data tensors itself — TODO confirm).
    f_o_mean_std : normalization statistics passed through to `vali_test`.

    Returns
    -------
    tuple of (flow MAE, flow RMSE, occupancy MAE, occupancy RMSE).
    """
    flow = vt["flow"]
    occ = vt["occupancy"]
    # Only the masks are explicitly placed on the GPU device.
    flow_mask = vt["flow_mask"].to(device)
    occ_mask = vt["occupancy_mask"].to(device)
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    f_mae, f_rmse, o_mae, o_rmse = metrics
    # Report each metric on its own line, matching the original output format.
    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            (f_mae, f_rmse, o_mae, o_rmse)):
        print(label, value)
    return f_mae, f_rmse, o_mae, o_rmse
# Evaluate the trained model on the validation split.
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse = apply_to_vali_test(
    trained_model, vali, f_o_mean_std)
flow_mae 43.11435771146357 flow_rmse 70.86455060162734 occ_mae 0.03549291309592307 occ_rmse 0.06980494802059126
# Evaluate the trained model on the held-out test split.
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse = apply_to_vali_test(
    trained_model, test, f_o_mean_std)
flow_mae 41.73699983552198 flow_rmse 68.00268418318444 occ_mae 0.031370341692424086 occ_rmse 0.06279942750383234